import numpy as np
import tensorflow as tf
import os
import matplotlib.pyplot as plt
import cv2
import numpy as np
from sklearn.model_selection import train_test_split
import warnings
warnings.filterwarnings("ignore")
# PATH ALLOCATION
# Dataset root: one sub-directory per class label (A-Z, del, nothing, space).
data_dir = os.path.join('asl_alphabet_data')
# Sorted listing keeps the class-index -> class-name mapping deterministic
# across platforms (os.listdir order is otherwise unspecified).
classes = sorted(os.listdir(data_dir))
print(classes)
print(len(classes))
# output: ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'del', 'nothing', 'space'] 29
# Load and preprocess data from subfolders
def load_data(folder):
    """Load every image under *folder*, one sub-directory per class.

    Parameters
    ----------
    folder : str
        Root directory whose immediate sub-directories are class names.

    Returns
    -------
    tuple[list, list]
        ``(images, labels)`` where each image is an RGB uint8 array
        resized to 224x224 and each label is the integer position of
        its class in the sorted directory listing of *folder*.
    """
    images = []
    labels = []
    # Sorted so label indices match the module-level `classes` list.
    class_names = sorted(os.listdir(folder))
    for class_index, class_name in enumerate(class_names):
        class_path = os.path.join(folder, class_name)
        # Stray files at the root (e.g. .DS_Store) are not classes;
        # enumerate still counts them so indices stay aligned with
        # sorted(os.listdir(folder)).
        if not os.path.isdir(class_path):
            continue
        for image_name in os.listdir(class_path):
            image_path = os.path.join(class_path, image_name)
            image = cv2.imread(image_path)
            if image is None:
                # cv2.imread returns None (no exception) for unreadable
                # or non-image files; skip instead of crashing in cvtColor.
                continue
            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)  # OpenCV loads BGR
            image = cv2.resize(image, (224, 224))           # MobileNet input size
            images.append(image)
            labels.append(class_index)
    return images, labels
# Load and preprocess image data
images, labels = load_data('asl_alphabet_data')

# 80/20 train/test split, stratified so every class keeps the same
# proportion in both splits; fixed seed for reproducibility.
train_images, test_images, train_labels, test_labels = train_test_split(
    images, labels, test_size=0.2, random_state=42, stratify=labels
)

# Materialize as float32 arrays and scale pixels into [0, 1] in one step
# (equivalent to np.array(...).astype('float32') / 255.0).
train_images = np.array(train_images, dtype='float32') / 255.0
test_images = np.array(test_images, dtype='float32') / 255.0
train_labels = np.array(train_labels)
test_labels = np.array(test_labels)

# Shape sanity check before training.
print("Train images shape:", train_images.shape)
print("Train labels shape:", train_labels.shape)
print("Test images shape:", test_images.shape)
print("Test labels shape:", test_labels.shape)
# output: Train images shape: (2320, 224, 224, 3) Train labels shape: (2320,) Test images shape: (580, 224, 224, 3) Test labels shape: (580,)
# Preview a 4x4 grid of the first 16 training samples with their
# integer labels as titles.
fig, ax = plt.subplots(nrows=4, ncols=4, figsize=(20, 20))
ax = ax.flatten()
for idx in range(16):
    ax[idx].imshow(train_images[idx])  # already scaled to [0, 1]
    ax[idx].title.set_text(train_labels[idx])
# MobileNet
from tensorflow.keras.applications import MobileNet
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Model
# Neural network architecture: frozen MobileNet backbone + small Dense head.
pretrainedModel = MobileNet(
    input_shape=(224, 224, 3),
    include_top=False,      # drop ImageNet's 1000-way classifier
    weights='imagenet',
    pooling='avg'           # global average pooling -> flat feature vector
)
# Freeze the backbone: only the new head below is trained.
pretrainedModel.trainable = False
inputs = pretrainedModel.input
x = Dense(128, activation='relu')(pretrainedModel.output)
x = Dropout(0.5)(x)  # 50% dropout to regularize the head
x = Dense(128, activation='relu')(x)
x = Dropout(0.5)(x)  # 50% dropout to regularize the head
# Output width derived from the discovered classes (29 for this dataset)
# instead of a hard-coded constant, so the head always matches the data
# on disk.
outputs = Dense(len(classes), activation='softmax')(x)
model = Model(inputs=inputs, outputs=outputs)
# Sparse categorical CE matches the integer (non-one-hot) labels above.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
#TRAINING
# TensorBoard callback: writes per-epoch metrics to ./logs so the
# learning phase can be inspected at any instance.
tensor_call = tf.keras.callbacks.TensorBoard(log_dir='logs')
# (A bare `tensor_call` expression statement was here — a no-op leftover
# from notebook cell output; removed.)
# fit the model
history = model.fit(train_images, train_labels, epochs=50, callbacks=[tensor_call])
# output: Epoch 1/50 73/73 - 43s 535ms/step - loss: 3.3482 - accuracy: 0.0668 ... (per-epoch training log elided; loss fell from 3.3482 to 0.3410 and accuracy rose from 0.0668 to 0.8853 over 50 epochs) ... Epoch 50/50 73/73 - 41s 568ms/step - loss: 0.3410 - accuracy: 0.8853
import matplotlib.pyplot as plt

# Plot the learning curves recorded by model.fit. Only training curves
# exist: fit() was called without validation data.
plt.plot(history.history['accuracy'], label='Training Accuracy', color='green')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.title('Training Accuracy')
plt.legend()
plt.show()

# Training loss over epochs.
plt.plot(history.history['loss'], label='Training Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('Training Loss')
plt.legend()
plt.show()
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score

# Evaluate the trained model on the held-out test split.
predictions = model.predict(test_images)
# The class with the highest softmax probability is the prediction.
predicted_labels = np.argmax(predictions, axis=1)

# Weighted averaging weights each class's score by its support.
accuracy = accuracy_score(test_labels, predicted_labels)
precision = precision_score(test_labels, predicted_labels, average='weighted')
recall = recall_score(test_labels, predicted_labels, average='weighted')
f1 = f1_score(test_labels, predicted_labels, average='weighted')

print("Accuracy:", accuracy * 100, "%")
print("Precision:", precision * 100, "%")
print("Recall:", recall * 100, "%")
print("F1 Score:", f1 * 100, "%")
# output: 19/19 - 9s 443ms/step Accuracy: 98.9655172413793 % Precision: 98.9810733730879 % Recall: 98.9655172413793 % F1 Score: 98.96508593733152 %
# Show the first 32 test images annotated with true vs. predicted class
# names (a tall 16x2 grid so the labels stay readable).
plt.figure(figsize=(20, 40))
for idx in range(32):
    axis = plt.subplot(16, 2, idx + 1)
    axis.set_xticks([])
    axis.set_yticks([])
    axis.grid(False)
    axis.imshow(test_images[idx])
    axis.set_xlabel(f"True: {classes[test_labels[idx]]}\nPredicted: {classes[predicted_labels[idx]]}")
plt.tight_layout()  # avoid subplot/label overlap
plt.show()
# Save model
# NOTE(review): '.h5' is the legacy HDF5 serialization; recent Keras
# versions recommend the native '.keras' format — confirm what the
# downstream loader expects before switching.
model.save('sign_language_MAIN_1.h5')